Serge Demeyer | Publications | E-mail Feedback
Last updated on Thursday, November 16, 2023
@inproceedings{VanBladel2020SANER,
  author    = {van Bladel, Brent and Demeyer, Serge},
  title     = {Clone Detection in Test Code: An Empirical Evaluation},
  booktitle = {Proceedings {SANER 2020} (International Conference on Software Analysis, Evolution and Reengineering)},
  pages     = {492--500},
  publisher = {IEEE},
  year      = {2020},
  doi       = {10.1109/SANER48275.2020.9054798},
  url       = {https://figshare.com/collections/Clone_Detection_in_Test_Code_an_Empirical_Evaluation/4710692},
  note      = {Acceptance ratio: 46 / 199 = 23\%},
  annote    = {internationalconference},
  abstract  = {Duplicated test code (a.k.a. test code clones) has a negative impact on test comprehension and maintenance. Moreover, the typical structure of unit test code induces structural similarity, increasing the amount of duplication. Yet, most research on software clones and clone detection tools is focused on production code, often ignoring test code. In this paper we will fill this gap by comparing four different clone detection tools (NiCad, CPD, iClones, TCORE) against the test code of three open-source projects. Our analysis confirms the prevalence of test code clones, as we observed between 23\% and 29\% test code duplication. We also show that most of the tools suffer from false negatives (NiCad = 83\%, CPD = 84\%, iClones = 21\%, TCORE = 65\%), which leaves ample room for improvement. These results indicate that further research on test clone detection is warranted.},
}